Posted to commits@kyuubi.apache.org by ya...@apache.org on 2023/01/05 05:59:43 UTC
[kyuubi] branch master updated: [KYUUBI #4084] [Authz] Implicitly extract names of extractors and OperationType in spec file generation, preventing direct use of class-name strings
This is an automated email from the ASF dual-hosted git repository.
yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new b346fb084 [KYUUBI #4084] [Authz] Implicitly extract names of extractors and OperationType in spec file generation, preventing direct use of class-name strings
b346fb084 is described below
commit b346fb0845845707f19ead5b48582f0959e80427
Author: liangbowen <li...@gf.com.cn>
AuthorDate: Thu Jan 5 13:59:33 2023 +0800
[KYUUBI #4084] [Authz] Implicitly extract names of extractors and OperationType in spec file generation, preventing direct use of class-name strings
### _Why are the changes needed?_
- add implicit conversions `classSimpleName` and `operationTypeStr` to the `gen` package object under `org.apache.kyuubi.plugin.spark.authz` in the test sources (see the sketch after this list)
- tie spec generation to the extractor classes at compile time, easing future third-party adaptation and avoiding calling the wrong `getName` of the class
- prevent using strings or class names directly as magic values, which easily causes mistakes
- regenerating the spec JSON files after this PR produces no changes
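A minimal, self-contained sketch of the intent, assuming simplified stand-in names and signatures (not the real `serde` API):
```scala
import scala.language.implicitConversions

object SpecGenSketch extends App {
  // stand-in for a real extractor class in org.apache.kyuubi.plugin.spark.authz.serde
  class StringDatabaseExtractor

  // simplified spec field: the generated JSON stores the extractor's simple name
  case class DatabaseDesc(fieldName: String, fieldExtractor: String)

  // the implicit added by this PR: a Class[_] is accepted wherever a String is expected
  implicit def classSimpleName(clz: Class[_]): String = clz.getSimpleName

  // before: a magic string literal, where a typo only fails at spec-load time
  val before = DatabaseDesc("databaseName", "StringDatabaseExtractor")
  // after: the compiler verifies the class reference, and the implicit
  // conversion still serializes it to the same simple-name String
  val after = DatabaseDesc("databaseName", classOf[StringDatabaseExtractor])

  assert(before == after)
}
```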
### _How was this patch tested?_
- [x] Add some test cases that check the changes thoroughly including negative and positive cases if possible
- [ ] Add screenshots for manual tests if appropriate
- [x] [Run test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests) locally before making a pull request
Closes #4084 from bowenliang123/authz-class-simplename.
54b831002 [liangbowen] simplify classSimpleName
1e191d51a [liangbowen] apply operationTypeStr to TableCommands
a1ea7a4e5 [liangbowen] apply operationTypeStr to DatabaseCommands
be6e7f2e3 [liangbowen] use classSimpleName implicitly
88332ff01 [liangbowen] make operationTypeStr method implicit
7eb023491 [liangbowen] add operationTypeStr method for extracting OperationType name
86bc45274 [liangbowen] replace AuthZUtils's extractorKey method by adding an implicit classSimpleName method in the package object of `org.apache.kyuubi.plugin.spark.authz`
eece16ce8 [liangbowen] rename method to `extractorKey`. add method comments.
b36382c4c [liangbowen] refactor and rename to extractorName
0dd2b1043 [liangbowen] apply getClassSimpleName to DatabaseCommands
bd3ea8aa3 [liangbowen] apply getClassSimpleName to IcebergCommands
1026fbdcb [liangbowen] apply getClassSimpleName to TableCommands
9b06872f4 [liangbowen] apply getClassSimpleName to FunctionCommands
cb8494aef [liangbowen] apply getClassSimpleName to IcebergCommands
b787e72ee [liangbowen] apply getClassSimpleName to Scans
3ab171f18 [liangbowen] add getClassSimpleName method in AuthZUtils for getting class's simple name
Authored-by: liangbowen <li...@gf.com.cn>
Signed-off-by: Kent Yao <ya...@apache.org>
---
.../plugin/spark/authz/gen/DatabaseCommands.scala | 78 +++++---
.../plugin/spark/authz/gen/FunctionCommands.scala | 34 ++--
.../plugin/spark/authz/gen/IcebergCommands.scala | 4 +-
.../kyuubi/plugin/spark/authz/gen/Scans.scala | 10 +-
.../plugin/spark/authz/gen/TableCommands.scala | 208 +++++++++++----------
.../kyuubi/plugin/spark/authz/gen/package.scala | 28 +++
6 files changed, 214 insertions(+), 148 deletions(-)
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DatabaseCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DatabaseCommands.scala
index 65dc4c240..09ff916a0 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DatabaseCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DatabaseCommands.scala
@@ -17,116 +17,132 @@
package org.apache.kyuubi.plugin.spark.authz.gen
-import org.apache.kyuubi.plugin.spark.authz.serde.{CatalogDesc, DatabaseCommandSpec, DatabaseDesc}
+import org.apache.kyuubi.plugin.spark.authz.OperationType._
+import org.apache.kyuubi.plugin.spark.authz.serde._
object DatabaseCommands {
val AlterDatabaseProperties = {
DatabaseCommandSpec(
"org.apache.spark.sql.execution.command.AlterDatabasePropertiesCommand",
- Seq(DatabaseDesc("databaseName", "StringDatabaseExtractor")),
- "ALTERDATABASE")
+ Seq(DatabaseDesc("databaseName", classOf[StringDatabaseExtractor])),
+ ALTERDATABASE)
}
val CommentOnNamespace = {
DatabaseCommandSpec(
"org.apache.spark.sql.catalyst.plans.logical.CommentOnNamespace",
- Seq(DatabaseDesc("child", "ResolvedNamespaceDatabaseExtractor")),
- "ALTERDATABASE")
+ Seq(DatabaseDesc("child", classOf[ResolvedNamespaceDatabaseExtractor])),
+ ALTERDATABASE)
}
val SetNamespaceProperties = {
DatabaseCommandSpec(
"org.apache.spark.sql.catalyst.plans.logical.SetNamespaceProperties",
- Seq(DatabaseDesc("namespace", "ResolvedNamespaceDatabaseExtractor")),
- "ALTERDATABASE")
+ Seq(DatabaseDesc("namespace", classOf[ResolvedNamespaceDatabaseExtractor])),
+ ALTERDATABASE)
}
val SetNamespaceLocation = {
DatabaseCommandSpec(
"org.apache.spark.sql.catalyst.plans.logical.SetNamespaceLocation",
- Seq(DatabaseDesc("namespace", "ResolvedNamespaceDatabaseExtractor")),
- "ALTERDATABASE_LOCATION")
+ Seq(DatabaseDesc("namespace", classOf[ResolvedNamespaceDatabaseExtractor])),
+ ALTERDATABASE_LOCATION)
}
val CreateNamespace = {
- val databaseDesc1 = DatabaseDesc("name", "ResolvedDBObjectNameDatabaseExtractor")
+ val databaseDesc1 =
+ DatabaseDesc("name", classOf[ResolvedDBObjectNameDatabaseExtractor])
val databaseDesc2 =
- DatabaseDesc("namespace", "StringSeqDatabaseExtractor", catalogDesc = Some(CatalogDesc()))
+ DatabaseDesc(
+ "namespace",
+ classOf[StringSeqDatabaseExtractor],
+ catalogDesc = Some(CatalogDesc()))
DatabaseCommandSpec(
"org.apache.spark.sql.catalyst.plans.logical.CreateNamespace",
Seq(databaseDesc1, databaseDesc2),
- "CREATEDATABASE")
+ CREATEDATABASE)
}
val DropNamespace = {
DatabaseCommandSpec(
"org.apache.spark.sql.catalyst.plans.logical.DropNamespace",
- Seq(DatabaseDesc("namespace", "ResolvedNamespaceDatabaseExtractor")),
- "DROPDATABASE")
+ Seq(DatabaseDesc("namespace", classOf[ResolvedNamespaceDatabaseExtractor])),
+ DROPDATABASE)
}
val AnalyzeTables = {
DatabaseCommandSpec(
"org.apache.spark.sql.execution.command.AnalyzeTablesCommand",
- Seq(DatabaseDesc("databaseName", "StringOptionDatabaseExtractor", isInput = true)),
- "ANALYZE_TABLE")
+ Seq(DatabaseDesc(
+ "databaseName",
+ classOf[StringOptionDatabaseExtractor],
+ isInput = true)),
+ ANALYZE_TABLE)
}
val SetDatabase = {
val cmd = "org.apache.spark.sql.execution.command.SetDatabaseCommand"
- val databaseDesc = DatabaseDesc("databaseName", "StringDatabaseExtractor", isInput = true)
- DatabaseCommandSpec(cmd, Seq(databaseDesc), "SWITCHDATABASE")
+ val databaseDesc =
+ DatabaseDesc("databaseName", classOf[StringDatabaseExtractor], isInput = true)
+ DatabaseCommandSpec(cmd, Seq(databaseDesc), SWITCHDATABASE)
}
val DescribeDatabase = {
val cmd = "org.apache.spark.sql.execution.command.DescribeDatabaseCommand"
- val databaseDesc = DatabaseDesc("databaseName", "StringDatabaseExtractor", isInput = true)
- DatabaseCommandSpec(cmd, Seq(databaseDesc), "DESCDATABASE")
+ val databaseDesc =
+ DatabaseDesc("databaseName", classOf[StringDatabaseExtractor], isInput = true)
+ DatabaseCommandSpec(cmd, Seq(databaseDesc), DESCDATABASE)
}
val SetCatalogAndNamespace = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.SetCatalogAndNamespace"
val databaseDesc1 =
- DatabaseDesc("child", "ResolvedDBObjectNameDatabaseExtractor", isInput = true)
+ DatabaseDesc(
+ "child",
+ classOf[ResolvedDBObjectNameDatabaseExtractor],
+ isInput = true)
val databaseDesc2 =
DatabaseDesc(
"namespace",
- "StringSeqOptionDatabaseExtractor",
+ classOf[StringSeqOptionDatabaseExtractor],
catalogDesc = Some(CatalogDesc(
fieldName = "catalogName",
- fieldExtractor = "StringOptionCatalogExtractor")),
+ fieldExtractor = classOf[StringOptionCatalogExtractor])),
isInput = true)
- DatabaseCommandSpec(cmd, Seq(databaseDesc1, databaseDesc2), "SWITCHDATABASE")
+ DatabaseCommandSpec(cmd, Seq(databaseDesc1, databaseDesc2), SWITCHDATABASE)
}
val SetNamespace = {
val cmd = "org.apache.spark.sql.execution.command.SetNamespaceCommand"
val databaseDesc = DatabaseDesc(
"namespace",
- "StringSeqDatabaseExtractor",
+ classOf[StringSeqDatabaseExtractor],
isInput = true)
- DatabaseCommandSpec(cmd, Seq(databaseDesc), "SWITCHDATABASE")
+ DatabaseCommandSpec(cmd, Seq(databaseDesc), SWITCHDATABASE)
}
val DescribeNamespace = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.DescribeNamespace"
val databaseDesc =
- DatabaseDesc("namespace", "ResolvedNamespaceDatabaseExtractor", isInput = true)
- DatabaseCommandSpec(cmd, Seq(databaseDesc), "DESCDATABASE")
+ DatabaseDesc(
+ "namespace",
+ classOf[ResolvedNamespaceDatabaseExtractor],
+ isInput = true)
+ DatabaseCommandSpec(cmd, Seq(databaseDesc), DESCDATABASE)
}
val data = Array(
AlterDatabaseProperties,
AlterDatabaseProperties.copy(
classname = "org.apache.spark.sql.execution.command.AlterDatabaseSetLocationCommand",
- opType = "ALTERDATABASE_LOCATION"),
+ opType = ALTERDATABASE_LOCATION),
AlterDatabaseProperties.copy(
classname = "org.apache.spark.sql.execution.command.CreateDatabaseCommand",
- opType = "CREATEDATABASE"),
+ opType = CREATEDATABASE),
AlterDatabaseProperties.copy(
classname = "org.apache.spark.sql.execution.command.DropDatabaseCommand",
- opType = "DROPDATABASE"),
+ opType = DROPDATABASE),
AnalyzeTables,
CreateNamespace,
CommentOnNamespace,
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/FunctionCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/FunctionCommands.scala
index 7afa2df8c..590ad1786 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/FunctionCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/FunctionCommands.scala
@@ -17,7 +17,8 @@
package org.apache.kyuubi.plugin.spark.authz.gen
-import org.apache.kyuubi.plugin.spark.authz.serde.{DatabaseDesc, FunctionCommandSpec, FunctionDesc, FunctionTypeDesc, StringOptionDatabaseExtractor, TempMarkerFunctionTypeExtractor}
+import org.apache.kyuubi.plugin.spark.authz.OperationType._
+import org.apache.kyuubi.plugin.spark.authz.serde._
object FunctionCommands {
@@ -25,51 +26,56 @@ object FunctionCommands {
val cmd = "org.apache.spark.sql.execution.command.CreateFunctionCommand"
val functionTypeDesc = FunctionTypeDesc(
"isTemp",
- classOf[TempMarkerFunctionTypeExtractor].getSimpleName,
+ classOf[TempMarkerFunctionTypeExtractor],
Seq("TEMP"))
val databaseDesc =
- DatabaseDesc("databaseName", classOf[StringOptionDatabaseExtractor].getSimpleName)
+ DatabaseDesc("databaseName", classOf[StringOptionDatabaseExtractor])
val functionDesc = FunctionDesc(
"functionName",
- "StringFunctionExtractor",
+ classOf[StringFunctionExtractor],
Some(databaseDesc),
Some(functionTypeDesc))
- FunctionCommandSpec(cmd, Seq(functionDesc), "CREATEFUNCTION")
+ FunctionCommandSpec(cmd, Seq(functionDesc), CREATEFUNCTION)
}
val DescribeFunction = {
val cmd = "org.apache.spark.sql.execution.command.DescribeFunctionCommand"
val skips = Seq("TEMP", "SYSTEM")
- val functionTypeDesc1 = FunctionTypeDesc("info", "ExpressionInfoFunctionTypeExtractor", skips)
+ val functionTypeDesc1 =
+ FunctionTypeDesc("info", classOf[ExpressionInfoFunctionTypeExtractor], skips)
val functionDesc1 = FunctionDesc(
"info",
- "ExpressionInfoFunctionExtractor",
+ classOf[ExpressionInfoFunctionExtractor],
functionTypeDesc = Some(functionTypeDesc1),
isInput = true)
val functionTypeDesc2 =
- FunctionTypeDesc("functionName", "FunctionIdentifierFunctionTypeExtractor", skips)
+ FunctionTypeDesc(
+ "functionName",
+ classOf[FunctionIdentifierFunctionTypeExtractor],
+ skips)
val functionDesc2 = FunctionDesc(
"functionName",
- "FunctionIdentifierFunctionExtractor",
+ classOf[FunctionIdentifierFunctionExtractor],
functionTypeDesc = Some(functionTypeDesc2),
isInput = true)
- FunctionCommandSpec(cmd, Seq(functionDesc1, functionDesc2), "DESCFUNCTION")
+ FunctionCommandSpec(cmd, Seq(functionDesc1, functionDesc2), DESCFUNCTION)
}
val DropFunction = {
val cmd = "org.apache.spark.sql.execution.command.DropFunctionCommand"
- CreateFunction.copy(cmd, opType = "DROPFUNCTION")
+ CreateFunction.copy(cmd, opType = DROPFUNCTION)
}
val RefreshFunction = {
val cmd = "org.apache.spark.sql.execution.command.RefreshFunctionCommand"
- val databaseDesc = DatabaseDesc("databaseName", "StringOptionDatabaseExtractor")
+ val databaseDesc =
+ DatabaseDesc("databaseName", classOf[StringOptionDatabaseExtractor])
val functionDesc = FunctionDesc(
"functionName",
- "StringFunctionExtractor",
+ classOf[StringFunctionExtractor],
Some(databaseDesc))
- FunctionCommandSpec(cmd, Seq(functionDesc), "RELOADFUNCTION")
+ FunctionCommandSpec(cmd, Seq(functionDesc), RELOADFUNCTION)
}
val data = Array(
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/IcebergCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/IcebergCommands.scala
index 4c4eda30a..f316daa90 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/IcebergCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/IcebergCommands.scala
@@ -27,7 +27,7 @@ object IcebergCommands {
val tableDesc =
TableDesc(
"table",
- "DataSourceV2RelationTableExtractor",
+ classOf[DataSourceV2RelationTableExtractor],
actionTypeDesc = Some(actionTypeDesc))
TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(QueryDesc("query")))
}
@@ -42,7 +42,7 @@ object IcebergCommands {
val actionTypeDesc = ActionTypeDesc(null, null, Some("UPDATE"))
val tableDesc = TableDesc(
"targetTable",
- "DataSourceV2RelationTableExtractor",
+ classOf[DataSourceV2RelationTableExtractor],
actionTypeDesc = Some(actionTypeDesc))
val queryDesc = QueryDesc("sourceTable")
TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryDesc))
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/Scans.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/Scans.scala
index c2ca306ae..56a3a5819 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/Scans.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/Scans.scala
@@ -17,7 +17,7 @@
package org.apache.kyuubi.plugin.spark.authz.gen
-import org.apache.kyuubi.plugin.spark.authz.serde.{CatalogTableOptionTableExtractor, CatalogTableTableExtractor, DataSourceV2RelationTableExtractor, ScanDesc, ScanSpec}
+import org.apache.kyuubi.plugin.spark.authz.serde._
object Scans {
@@ -26,7 +26,7 @@ object Scans {
val tableDesc =
ScanDesc(
"tableMeta",
- classOf[CatalogTableTableExtractor].getSimpleName)
+ classOf[CatalogTableTableExtractor])
ScanSpec(r, Seq(tableDesc))
}
@@ -35,7 +35,7 @@ object Scans {
val tableDesc =
ScanDesc(
"catalogTable",
- classOf[CatalogTableOptionTableExtractor].getSimpleName)
+ classOf[CatalogTableOptionTableExtractor])
ScanSpec(r, Seq(tableDesc))
}
@@ -44,7 +44,7 @@ object Scans {
val tableDesc =
ScanDesc(
null,
- classOf[DataSourceV2RelationTableExtractor].getSimpleName)
+ classOf[DataSourceV2RelationTableExtractor])
ScanSpec(r, Seq(tableDesc))
}
@@ -53,7 +53,7 @@ object Scans {
val tableDesc =
ScanDesc(
"catalogTable",
- classOf[CatalogTableTableExtractor].getSimpleName)
+ classOf[CatalogTableTableExtractor])
ScanSpec(r, Seq(tableDesc))
}
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
index 93eb28bfb..fd5c2574e 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
@@ -17,35 +17,37 @@
package org.apache.kyuubi.plugin.spark.authz.gen
+import org.apache.kyuubi.plugin.spark.authz.OperationType._
import org.apache.kyuubi.plugin.spark.authz.serde._
object TableCommands {
// table extractors
- val tite = classOf[TableIdentifierTableExtractor].getSimpleName
+ val tite = classOf[TableIdentifierTableExtractor]
val tableNameDesc = TableDesc("tableName", tite)
val tableIdentDesc = TableDesc("tableIdent", tite)
- val resolvedTableDesc = TableDesc("child", "ResolvedTableTableExtractor")
- val resolvedDbObjectNameDesc = TableDesc("child", "ResolvedDbObjectNameTableExtractor")
+ val resolvedTableDesc = TableDesc("child", classOf[ResolvedTableTableExtractor])
+ val resolvedDbObjectNameDesc =
+ TableDesc("child", classOf[ResolvedDbObjectNameTableExtractor])
val overwriteActionTypeDesc =
- ActionTypeDesc("overwrite", "OverwriteOrInsertActionTypeExtractor")
+ ActionTypeDesc("overwrite", classOf[OverwriteOrInsertActionTypeExtractor])
val AlterTable = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.AlterTable"
- val tableDesc = TableDesc("ident", classOf[IdentifierTableExtractor].getSimpleName)
- TableCommandSpec(cmd, Seq(tableDesc), "ALTERTABLE_PROPERTIES")
+ val tableDesc = TableDesc("ident", classOf[IdentifierTableExtractor])
+ TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_PROPERTIES)
}
val AlterTableAddColumns = {
val cmd = "org.apache.spark.sql.execution.command.AlterTableAddColumnsCommand"
- val columnDesc = ColumnDesc("colsToAdd", "StructFieldSeqColumnExtractor")
+ val columnDesc = ColumnDesc("colsToAdd", classOf[StructFieldSeqColumnExtractor])
val tableDesc = TableDesc("table", tite, Some(columnDesc))
- TableCommandSpec(cmd, Seq(tableDesc), "ALTERTABLE_ADDCOLS")
+ TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_ADDCOLS)
}
val AddColumns = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.AddColumns"
- TableCommandSpec(cmd, Seq(resolvedTableDesc), "ALTERTABLE_ADDCOLS")
+ TableCommandSpec(cmd, Seq(resolvedTableDesc), ALTERTABLE_ADDCOLS)
}
val AlterColumn = {
@@ -60,39 +62,40 @@ object TableCommands {
val ReplaceColumns = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.ReplaceColumns"
- TableCommandSpec(cmd, Seq(resolvedTableDesc), "ALTERTABLE_REPLACECOLS")
+ TableCommandSpec(cmd, Seq(resolvedTableDesc), ALTERTABLE_REPLACECOLS)
}
val RenameColumn = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.RenameColumn"
- TableCommandSpec(cmd, Seq(resolvedTableDesc), "ALTERTABLE_RENAMECOL")
+ TableCommandSpec(cmd, Seq(resolvedTableDesc), ALTERTABLE_RENAMECOL)
}
val AlterTableAddPartition = {
val cmd = "org.apache.spark.sql.execution.command.AlterTableAddPartitionCommand"
- val columnDesc = ColumnDesc("partitionSpecsAndLocs", "PartitionLocsSeqColumnExtractor")
+ val columnDesc =
+ ColumnDesc("partitionSpecsAndLocs", classOf[PartitionLocsSeqColumnExtractor])
TableCommandSpec(
cmd,
Seq(tableNameDesc.copy(columnDesc = Some(columnDesc))),
- "ALTERTABLE_ADDPARTS")
+ ALTERTABLE_ADDPARTS)
}
val AlterTableChangeColumn = {
val cmd = "org.apache.spark.sql.execution.command.AlterTableChangeColumnCommand"
- val columnDesc = ColumnDesc("columnName", "StringColumnExtractor")
+ val columnDesc = ColumnDesc("columnName", classOf[StringColumnExtractor])
TableCommandSpec(
cmd,
Seq(tableNameDesc.copy(columnDesc = Some(columnDesc))),
- "ALTERTABLE_REPLACECOLS")
+ ALTERTABLE_REPLACECOLS)
}
val AlterTableDropPartition = {
val cmd = "org.apache.spark.sql.execution.command.AlterTableDropPartitionCommand"
- val columnDesc = ColumnDesc("specs", "PartitionSeqColumnExtractor")
+ val columnDesc = ColumnDesc("specs", classOf[PartitionSeqColumnExtractor])
TableCommandSpec(
cmd,
Seq(tableNameDesc.copy(columnDesc = Some(columnDesc))),
- "ALTERTABLE_DROPPARTS")
+ ALTERTABLE_DROPPARTS)
}
val AlterTableRename = {
@@ -100,7 +103,10 @@ object TableCommands {
val actionTypeDesc = ActionTypeDesc(null, null, Some("DELETE"))
val oldTableTableTypeDesc =
- TableTypeDesc("oldName", "TableIdentifierTableTypeExtractor", Seq("TEMP_VIEW"))
+ TableTypeDesc(
+ "oldName",
+ classOf[TableIdentifierTableTypeExtractor],
+ Seq("TEMP_VIEW"))
val oldTableD = TableDesc(
"oldName",
tite,
@@ -109,82 +115,82 @@ object TableCommands {
val newTableD =
TableDesc("newName", tite, tableTypeDesc = Some(oldTableTableTypeDesc))
- TableCommandSpec(cmd, Seq(oldTableD, newTableD), "ALTERTABLE_RENAME")
+ TableCommandSpec(cmd, Seq(oldTableD, newTableD), ALTERTABLE_RENAME)
}
// this is for spark 3.1 or below
val AlterTableRecoverPartitions = {
val cmd = "org.apache.spark.sql.execution.command.AlterTableRecoverPartitionsCommand"
- TableCommandSpec(cmd, Seq(tableNameDesc), "MSCK")
+ TableCommandSpec(cmd, Seq(tableNameDesc), MSCK)
}
val RepairTable = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.RepairTable"
- TableCommandSpec(cmd, Seq(resolvedTableDesc), "MSCK")
+ TableCommandSpec(cmd, Seq(resolvedTableDesc), MSCK)
}
val AlterTableRenamePartition = {
val cmd = "org.apache.spark.sql.execution.command.AlterTableRenamePartitionCommand"
- val columnDesc = ColumnDesc("oldPartition", "PartitionColumnExtractor")
+ val columnDesc = ColumnDesc("oldPartition", classOf[PartitionColumnExtractor])
TableCommandSpec(
cmd,
Seq(tableNameDesc.copy(columnDesc = Some(columnDesc))),
- "ALTERTABLE_RENAMEPART")
+ ALTERTABLE_RENAMEPART)
}
val AlterTableSerDeProperties = {
val cmd = "org.apache.spark.sql.execution.command.AlterTableSerDePropertiesCommand"
- val columnDesc = ColumnDesc("partSpec", "PartitionOptionColumnExtractor")
+ val columnDesc = ColumnDesc("partSpec", classOf[PartitionOptionColumnExtractor])
TableCommandSpec(
cmd,
Seq(tableNameDesc.copy(columnDesc = Some(columnDesc))),
- "ALTERTABLE_SERDEPROPERTIES")
+ ALTERTABLE_SERDEPROPERTIES)
}
val AlterTableSetLocation = {
val cmd = "org.apache.spark.sql.execution.command.AlterTableSetLocationCommand"
- val columnDesc = ColumnDesc("partitionSpec", "PartitionOptionColumnExtractor")
+ val columnDesc = ColumnDesc("partitionSpec", classOf[PartitionOptionColumnExtractor])
TableCommandSpec(
cmd,
Seq(tableNameDesc.copy(columnDesc = Some(columnDesc))),
- "ALTERTABLE_LOCATION")
+ ALTERTABLE_LOCATION)
}
val AlterTableSetProperties = TableCommandSpec(
"org.apache.spark.sql.execution.command.AlterTableSetPropertiesCommand",
Seq(tableNameDesc),
- "ALTERTABLE_PROPERTIES")
+ ALTERTABLE_PROPERTIES)
val AlterTableUnsetProperties = AlterTableSetProperties.copy(classname =
"org.apache.spark.sql.execution.command.AlterTableUnsetPropertiesCommand")
val AlterViewAs = {
val tableTypeDesc =
- TableTypeDesc("name", "TableIdentifierTableTypeExtractor", Seq("TEMP_VIEW"))
+ TableTypeDesc("name", classOf[TableIdentifierTableTypeExtractor], Seq("TEMP_VIEW"))
TableCommandSpec(
"org.apache.spark.sql.execution.command.AlterViewAsCommand",
Seq(TableDesc("name", tite, tableTypeDesc = Some(tableTypeDesc))),
- "ALTERVIEW_AS",
+ ALTERVIEW_AS,
Seq(QueryDesc("query")))
}
val AnalyzeColumn = {
val cmd = "org.apache.spark.sql.execution.command.AnalyzeColumnCommand"
- val cd1 = ColumnDesc("columnNames", "StringSeqColumnExtractor")
- val cd2 = cd1.copy(fieldExtractor = "StringSeqOptionColumnExtractor")
+ val cd1 = ColumnDesc("columnNames", classOf[StringSeqColumnExtractor])
+ val cd2 = cd1.copy(fieldExtractor = classOf[StringSeqOptionColumnExtractor])
val td1 = tableIdentDesc.copy(columnDesc = Some(cd1), isInput = true)
val td2 = td1.copy(columnDesc = Some(cd2))
- TableCommandSpec(cmd, Seq(td1, td2), "ANALYZE_TABLE")
+ TableCommandSpec(cmd, Seq(td1, td2), ANALYZE_TABLE)
}
val AnalyzePartition = {
val cmd = "org.apache.spark.sql.execution.command.AnalyzePartitionCommand"
- val columnDesc = ColumnDesc("partitionSpec", "PartitionColumnExtractor")
+ val columnDesc = ColumnDesc("partitionSpec", classOf[PartitionColumnExtractor])
TableCommandSpec(
cmd,
Seq(tableIdentDesc.copy(columnDesc = Some(columnDesc), isInput = true)),
- "ANALYZE_TABLE")
+ ANALYZE_TABLE)
}
val AnalyzeTable = {
@@ -192,43 +198,43 @@ object TableCommands {
TableCommandSpec(
cmd,
Seq(tableIdentDesc.copy(isInput = true)),
- "ANALYZE_TABLE")
+ ANALYZE_TABLE)
}
val CreateTableV2 = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.CreateTable"
val tableDesc = TableDesc(
"tableName",
- "IdentifierTableExtractor",
+ classOf[IdentifierTableExtractor],
catalogDesc = Some(CatalogDesc()))
- TableCommandSpec(cmd, Seq(tableDesc, resolvedDbObjectNameDesc), "CREATETABLE")
+ TableCommandSpec(cmd, Seq(tableDesc, resolvedDbObjectNameDesc), CREATETABLE)
}
val CreateV2Table = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.CreateV2Table"
val tableDesc = TableDesc(
"tableName",
- "IdentifierTableExtractor",
+ classOf[IdentifierTableExtractor],
catalogDesc = Some(CatalogDesc()))
- TableCommandSpec(cmd, Seq(tableDesc), "CREATETABLE")
+ TableCommandSpec(cmd, Seq(tableDesc), CREATETABLE)
}
val CreateTableAsSelectV2 = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.CreateTableAsSelect"
val tableDesc = TableDesc(
"tableName",
- "IdentifierTableExtractor",
+ classOf[IdentifierTableExtractor],
catalogDesc = Some(CatalogDesc()))
TableCommandSpec(
cmd,
Seq(tableDesc, resolvedDbObjectNameDesc.copy(fieldName = "left")),
- "CREATETABLE_AS_SELECT",
+ CREATETABLE_AS_SELECT,
Seq(QueryDesc("query")))
}
val CommentOnTable = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.CommentOnTable"
- TableCommandSpec(cmd, Seq(resolvedTableDesc), "ALTERTABLE_PROPERTIES")
+ TableCommandSpec(cmd, Seq(resolvedTableDesc), ALTERTABLE_PROPERTIES)
}
val AppendDataV2 = {
@@ -237,7 +243,7 @@ object TableCommands {
val tableDesc =
TableDesc(
"table",
- "DataSourceV2RelationTableExtractor",
+ classOf[DataSourceV2RelationTableExtractor],
actionTypeDesc = Some(actionTypeDesc))
TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(QueryDesc("query")))
}
@@ -248,7 +254,7 @@ object TableCommands {
val tableDesc =
TableDesc(
"table",
- "DataSourceV2RelationTableExtractor",
+ classOf[DataSourceV2RelationTableExtractor],
actionTypeDesc = Some(actionTypeDesc))
TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(QueryDesc("query")))
}
@@ -264,7 +270,7 @@ object TableCommands {
val tableDesc =
TableDesc(
"table",
- "DataSourceV2RelationTableExtractor",
+ classOf[DataSourceV2RelationTableExtractor],
actionTypeDesc = Some(actionTypeDesc))
TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(QueryDesc("query")))
}
@@ -277,85 +283,90 @@ object TableCommands {
val AddPartitions = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.AddPartitions"
// TODO: add column desc
- val tableDesc = TableDesc("table", "DataSourceV2RelationTableExtractor")
- TableCommandSpec(cmd, Seq(tableDesc), "ALTERTABLE_ADDPARTS")
+ val tableDesc = TableDesc("table", classOf[DataSourceV2RelationTableExtractor])
+ TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_ADDPARTS)
}
val DropPartitions = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.DropPartitions"
// TODO: add column desc
- val tableDesc = TableDesc("table", "DataSourceV2RelationTableExtractor")
- TableCommandSpec(cmd, Seq(tableDesc), "ALTERTABLE_DROPPARTS")
+ val tableDesc = TableDesc("table", classOf[DataSourceV2RelationTableExtractor])
+ TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_DROPPARTS)
}
val RenamePartitions = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.RenamePartitions"
// TODO: add column desc
- val tableDesc = TableDesc("table", "DataSourceV2RelationTableExtractor")
- TableCommandSpec(cmd, Seq(tableDesc), "ALTERTABLE_RENAMEPART")
+ val tableDesc = TableDesc("table", classOf[DataSourceV2RelationTableExtractor])
+ TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_RENAMEPART)
}
val TruncatePartition = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.TruncatePartition"
// TODO: add column desc
- val tableDesc = TableDesc("table", "DataSourceV2RelationTableExtractor")
- TableCommandSpec(cmd, Seq(tableDesc), "ALTERTABLE_DROPPARTS")
+ val tableDesc = TableDesc("table", classOf[DataSourceV2RelationTableExtractor])
+ TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_DROPPARTS)
}
val CacheTableAsSelect = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.CacheTableAsSelect"
- TableCommandSpec(cmd, Nil, "CREATEVIEW", queryDescs = Seq(QueryDesc("plan")))
+ TableCommandSpec(cmd, Nil, CREATEVIEW, queryDescs = Seq(QueryDesc("plan")))
}
val CacheTable = {
val cmd = "org.apache.spark.sql.execution.command.CacheTableCommand"
- val queryDesc = QueryDesc("plan", "LogicalPlanOptionQueryExtractor")
- TableCommandSpec(cmd, Nil, "CREATEVIEW", queryDescs = Seq(queryDesc))
+ val queryDesc = QueryDesc("plan", classOf[LogicalPlanOptionQueryExtractor])
+ TableCommandSpec(cmd, Nil, CREATEVIEW, queryDescs = Seq(queryDesc))
}
val CacheTableV2 = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.CacheTable"
- TableCommandSpec(cmd, Nil, "CREATEVIEW", Seq(QueryDesc("table")))
+ TableCommandSpec(cmd, Nil, CREATEVIEW, Seq(QueryDesc("table")))
}
val CreateView = {
val cmd = "org.apache.spark.sql.execution.command.CreateViewCommand"
val tableTypeDesc = TableTypeDesc(
"viewType",
- "ViewTypeTableTypeExtractor",
+ classOf[ViewTypeTableTypeExtractor],
Seq("TEMP_VIEW", "GLOBAL_TEMP_VIEW"))
val tableDesc = TableDesc(
"name",
- "TableIdentifierTableExtractor",
+ classOf[TableIdentifierTableExtractor],
tableTypeDesc = Some(tableTypeDesc))
val queryDesc1 = QueryDesc("plan")
val queryDesc2 = QueryDesc("child")
- TableCommandSpec(cmd, Seq(tableDesc), "CREATEVIEW", queryDescs = Seq(queryDesc1, queryDesc2))
+ TableCommandSpec(
+ cmd,
+ Seq(tableDesc),
+ CREATEVIEW,
+ queryDescs = Seq(queryDesc1, queryDesc2))
}
val CreateTempViewUsing = {
val cmd = "org.apache.spark.sql.execution.datasources.CreateTempViewUsing"
- TableCommandSpec(cmd, Nil, "CREATEVIEW")
+ TableCommandSpec(cmd, Nil, CREATEVIEW)
}
val CreateDataSourceTable = {
val cmd = "org.apache.spark.sql.execution.command.CreateDataSourceTableCommand"
- val tableDesc = TableDesc("table", "CatalogTableTableExtractor")
- TableCommandSpec(cmd, Seq(tableDesc), "CREATETABLE")
+ val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor])
+ TableCommandSpec(cmd, Seq(tableDesc), CREATETABLE)
}
val CreateDataSourceTableAsSelect = {
val cmd = "org.apache.spark.sql.execution.command.CreateDataSourceTableAsSelectCommand"
CreateDataSourceTable.copy(
classname = cmd,
- opType = "CREATETABLE_AS_SELECT",
+ opType = CREATETABLE_AS_SELECT,
queryDescs = Seq(QueryDesc("query")))
}
val CreateHiveTableAsSelect = {
val cmd = "org.apache.spark.sql.hive.execution.CreateHiveTableAsSelectCommand"
- val columnDesc = ColumnDesc("outputColumnNames", "StringSeqColumnExtractor")
- val tableDesc = TableDesc("tableDesc", "CatalogTableTableExtractor", Some(columnDesc))
+ val columnDesc = ColumnDesc("outputColumnNames", classOf[StringSeqColumnExtractor])
+ val tableDesc =
+ TableDesc("tableDesc", classOf[CatalogTableTableExtractor], Some(columnDesc))
val queryDesc = QueryDesc("query")
TableCommandSpec(cmd, Seq(tableDesc), "CREATETABLE_AS_SELECT", queryDescs = Seq(queryDesc))
}
@@ -364,53 +375,56 @@ object TableCommands {
val cmd = "org.apache.spark.sql.execution.command.CreateTableLikeCommand"
val tableDesc1 = TableDesc(
"targetTable",
- "TableIdentifierTableExtractor",
+ classOf[TableIdentifierTableExtractor],
setCurrentDatabaseIfMissing = true)
val tableDesc2 = TableDesc(
"sourceTable",
- "TableIdentifierTableExtractor",
+ classOf[TableIdentifierTableExtractor],
isInput = true,
setCurrentDatabaseIfMissing = true)
- TableCommandSpec(cmd, Seq(tableDesc1, tableDesc2), "CREATETABLE")
+ TableCommandSpec(cmd, Seq(tableDesc1, tableDesc2), CREATETABLE)
}
val DescribeColumn = {
val cmd = "org.apache.spark.sql.execution.command.DescribeColumnCommand"
- val columnDesc = ColumnDesc("colNameParts", "StringSeqLastColumnExtractor")
+ val columnDesc = ColumnDesc("colNameParts", classOf[StringSeqLastColumnExtractor])
val tableDesc = TableDesc(
"table",
- "TableIdentifierTableExtractor",
+ classOf[TableIdentifierTableExtractor],
Some(columnDesc),
isInput = true)
- TableCommandSpec(cmd, Seq(tableDesc), "DESCTABLE")
+ TableCommandSpec(cmd, Seq(tableDesc), DESCTABLE)
}
val DescribeTable = {
val cmd = "org.apache.spark.sql.execution.command.DescribeTableCommand"
- val columnDesc = ColumnDesc("partitionSpec", "PartitionColumnExtractor")
+ val columnDesc = ColumnDesc("partitionSpec", classOf[PartitionColumnExtractor])
val tableDesc = TableDesc(
"table",
- "TableIdentifierTableExtractor",
+ classOf[TableIdentifierTableExtractor],
Some(columnDesc),
isInput = true,
setCurrentDatabaseIfMissing = true)
- TableCommandSpec(cmd, Seq(tableDesc), "DESCTABLE")
+ TableCommandSpec(cmd, Seq(tableDesc), DESCTABLE)
}
val DropTable = {
val cmd = "org.apache.spark.sql.execution.command.DropTableCommand"
val tableTypeDesc =
- TableTypeDesc("tableName", "TableIdentifierTableTypeExtractor", Seq("TEMP_VIEW"))
+ TableTypeDesc(
+ "tableName",
+ classOf[TableIdentifierTableTypeExtractor],
+ Seq("TEMP_VIEW"))
TableCommandSpec(
cmd,
Seq(tableNameDesc.copy(tableTypeDesc = Some(tableTypeDesc))),
- "DROPTABLE")
+ DROPTABLE)
}
val DropTableV2 = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.DropTable"
val tableDesc1 = resolvedTableDesc
- TableCommandSpec(cmd, Seq(tableDesc1), "DROPTABLE")
+ TableCommandSpec(cmd, Seq(tableDesc1), DROPTABLE)
}
val MergeIntoTable = {
@@ -418,7 +432,7 @@ object TableCommands {
val actionTypeDesc = ActionTypeDesc(null, null, Some("UPDATE"))
val tableDesc = TableDesc(
"targetTable",
- "DataSourceV2RelationTableExtractor",
+ classOf[DataSourceV2RelationTableExtractor],
actionTypeDesc = Some(actionTypeDesc))
val queryDesc = QueryDesc("sourceTable")
TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryDesc))
@@ -427,50 +441,52 @@ object TableCommands {
val ShowColumns = {
val cmd = "org.apache.spark.sql.execution.command.ShowColumnsCommand"
val tableDesc = tableNameDesc.copy(isInput = true)
- TableCommandSpec(cmd, Seq(tableDesc), "SHOWCOLUMNS")
+ TableCommandSpec(cmd, Seq(tableDesc), SHOWCOLUMNS)
}
val ShowCreateTable = {
val cmd = "org.apache.spark.sql.execution.command.ShowCreateTableCommand"
val tableDesc = tableNameDesc.copy(fieldName = "table", isInput = true)
- TableCommandSpec(cmd, Seq(tableDesc), "SHOW_CREATETABLE")
+ TableCommandSpec(cmd, Seq(tableDesc), SHOW_CREATETABLE)
}
val ShowTableProperties = {
val cmd = "org.apache.spark.sql.execution.command.ShowTablePropertiesCommand"
val tableDesc = tableNameDesc.copy(fieldName = "table", isInput = true)
- TableCommandSpec(cmd, Seq(tableDesc), "SHOW_TBLPROPERTIES")
+ TableCommandSpec(cmd, Seq(tableDesc), SHOW_TBLPROPERTIES)
}
val ShowCreateTableV2 = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.ShowCreateTable"
- val tableDesc = TableDesc("child", "ResolvedTableTableExtractor", isInput = true)
- TableCommandSpec(cmd, Seq(tableDesc), "SHOW_CREATETABLE")
+ val tableDesc =
+ TableDesc("child", classOf[ResolvedTableTableExtractor], isInput = true)
+ TableCommandSpec(cmd, Seq(tableDesc), SHOW_CREATETABLE)
}
val ShowTablePropertiesV2 = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.ShowTableProperties"
- val tableDesc = TableDesc("table", "ResolvedTableTableExtractor", isInput = true)
- TableCommandSpec(cmd, Seq(tableDesc), "SHOW_TBLPROPERTIES")
+ val tableDesc =
+ TableDesc("table", classOf[ResolvedTableTableExtractor], isInput = true)
+ TableCommandSpec(cmd, Seq(tableDesc), SHOW_TBLPROPERTIES)
}
val ShowPartitions = {
val cmd = "org.apache.spark.sql.execution.command.ShowPartitionsCommand"
- val columnDesc = ColumnDesc("spec", "PartitionOptionColumnExtractor")
+ val columnDesc = ColumnDesc("spec", classOf[PartitionOptionColumnExtractor])
val tableDesc = tableNameDesc.copy(isInput = true, columnDesc = Some(columnDesc))
- TableCommandSpec(cmd, Seq(tableDesc), "SHOWPARTITIONS")
+ TableCommandSpec(cmd, Seq(tableDesc), SHOWPARTITIONS)
}
val TruncateTable = {
val cmd = "org.apache.spark.sql.execution.command.TruncateTableCommand"
- val columnDesc = ColumnDesc("partitionSpec", "PartitionOptionColumnExtractor")
+ val columnDesc = ColumnDesc("partitionSpec", classOf[PartitionOptionColumnExtractor])
val tableDesc = tableNameDesc.copy(columnDesc = Some(columnDesc))
- TableCommandSpec(cmd, Seq(tableDesc), "TRUNCATETABLE")
+ TableCommandSpec(cmd, Seq(tableDesc), TRUNCATETABLE)
}
val TruncateTableV2 = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.TruncateTable"
- TableCommandSpec(cmd, Seq(resolvedTableDesc), "TRUNCATETABLE")
+ TableCommandSpec(cmd, Seq(resolvedTableDesc), TRUNCATETABLE)
}
val InsertIntoDataSource = {
@@ -478,7 +494,7 @@ object TableCommands {
val actionTypeDesc = overwriteActionTypeDesc
val tableDesc = TableDesc(
"logicalRelation",
- "LogicalRelationTableExtractor",
+ classOf[LogicalRelationTableExtractor],
actionTypeDesc = Some(actionTypeDesc))
TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(QueryDesc("query")))
}
@@ -486,10 +502,10 @@ object TableCommands {
val InsertIntoHiveTable = {
val cmd = "org.apache.spark.sql.hive.execution.InsertIntoHiveTable"
val actionTypeDesc = overwriteActionTypeDesc
- val columnDesc = ColumnDesc("outputColumnNames", "StringSeqColumnExtractor")
+ val columnDesc = ColumnDesc("outputColumnNames", classOf[StringSeqColumnExtractor])
val tableDesc = TableDesc(
"table",
- "CatalogTableTableExtractor",
+ classOf[CatalogTableTableExtractor],
Some(columnDesc),
Some(actionTypeDesc))
val queryDesc = QueryDesc("query")
@@ -505,7 +521,7 @@ object TableCommands {
val LoadData = {
val cmd = "org.apache.spark.sql.execution.command.LoadDataCommand"
val actionTypeDesc = overwriteActionTypeDesc.copy(fieldName = "isOverwrite")
- val columnDesc = ColumnDesc("partition", "PartitionOptionColumnExtractor")
+ val columnDesc = ColumnDesc("partition", classOf[PartitionOptionColumnExtractor])
val tableDesc = tableIdentDesc.copy(
fieldName = "table",
columnDesc = Some(columnDesc),
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/package.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/package.scala
new file mode 100644
index 000000000..7bb449469
--- /dev/null
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/package.scala
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kyuubi.plugin.spark.authz
+
+import scala.language.implicitConversions
+
+import org.apache.kyuubi.plugin.spark.authz.OperationType.OperationType
+
+package object gen {
+ implicit def classSimpleName(clz: Class[_]): String = clz.getSimpleName
+
+ implicit def operationTypeStr(t: OperationType): String = t.toString
+}
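For illustration only, a hypothetical stub showing how the second implicit resolves at a call site (the real `OperationType` enumeration lives in the main sources):
```scala
import scala.language.implicitConversions

object OpTypeSketch extends App {
  // stub mirroring the shape of the real OperationType enumeration
  object OperationType extends Enumeration {
    type OperationType = Value
    val CREATEDATABASE, DROPDATABASE = Value
  }
  import OperationType._

  implicit def operationTypeStr(t: OperationType): String = t.toString

  // a spec constructor still takes a plain String, so the generated
  // JSON is unchanged; the enum reference is converted implicitly
  def opTypeField(opType: String): String = opType

  assert(opTypeField(CREATEDATABASE) == "CREATEDATABASE")
}
```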