You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kyuubi.apache.org by bo...@apache.org on 2023/05/23 03:31:33 UTC
[kyuubi] branch master updated: [KYUUBI #4875] [AUTHZ] Remove checking Spark v2 in tests since Spark v2 not supported
This is an automated email from the ASF dual-hosted git repository.
bowenliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new 0456d14fa [KYUUBI #4875] [AUTHZ] Remove checking Spark v2 in tests since Spark v2 not supported
0456d14fa is described below
commit 0456d14fa8a65f89bea0a65c87249f241e614db7
Author: liangbowen <li...@gf.com.cn>
AuthorDate: Tue May 23 11:31:22 2023 +0800
[KYUUBI #4875] [AUTHZ] Remove checking Spark v2 in tests since Spark v2 not supported
### _Why are the changes needed?_
- remove assumptions of Spark v2 in Authz testing, since Spark v2 is marked as not supported
### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible
- [ ] Add screenshots for manual tests if appropriate
- [x] [Run test](https://kyuubi.readthedocs.io/en/master/develop_tools/testing.html#running-tests) locally before making a pull request
Closes #4875 from bowenliang123/authz-remove-spark2.
Closes #4875
6686a4d01 [liangbowen] remove checking spark v2
Authored-by: liangbowen <li...@gf.com.cn>
Signed-off-by: liangbowen <li...@gf.com.cn>
---
.../spark/authz/PrivilegesBuilderSuite.scala | 24 +++++-----------------
.../plugin/spark/authz/SparkSessionProvider.scala | 1 -
2 files changed, 5 insertions(+), 20 deletions(-)
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
index af4a7c262..340b34fc0 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
@@ -367,7 +367,7 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
assert(po.actionType === PrivilegeObjectActionType.OTHER)
assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
assert(po.catalog.isEmpty)
- assert(po.dbname === (if (isSparkV2) null else defaultDb))
+ assert(po.dbname === defaultDb)
assert(po.objectName === "AlterViewAsCommand")
checkTableOwner(po)
assert(po.columns.isEmpty)
@@ -523,7 +523,7 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
assert(po.actionType === PrivilegeObjectActionType.OTHER)
assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
assert(po.catalog.isEmpty)
- assert(po.dbname === (if (isSparkV2) null else defaultDb))
+ assert(po.dbname === defaultDb)
assert(po.objectName === "CreateViewCommand")
assert(po.columns.isEmpty)
val accessType = ranger.AccessType(po, operationType, isInput = false)
@@ -543,7 +543,7 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
assert(po.actionType === PrivilegeObjectActionType.OTHER)
assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
assert(po.catalog.isEmpty)
- assert(po.dbname === (if (isSparkV2) null else defaultDb))
+ assert(po.dbname === defaultDb)
assert(po.objectName === tableName)
assert(po.columns.isEmpty)
val accessType = ranger.AccessType(po, operationType, isInput = false)
@@ -958,7 +958,6 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
}
test("Query: CTE") {
- assume(!isSparkV2)
checkColumns(
s"""
|with t(c) as (select coalesce(max(key), pid, 1) from $reusedPartTable group by pid)
@@ -1230,7 +1229,6 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
}
test("AlterTableChangeColumnCommand") {
- assume(!isSparkV2)
val plan = sql(s"ALTER TABLE $reusedTable" +
s" ALTER COLUMN value COMMENT 'alter column'").queryExecution.analyzed
val (in, out, operationType) = PrivilegesBuilder.build(plan, spark)
@@ -1298,7 +1296,7 @@ class InMemoryPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
assert(po.actionType === PrivilegeObjectActionType.OTHER)
assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
assert(po.catalog.isEmpty)
- assert(po.dbname === (if (isSparkV2) null else defaultDb))
+ assert(po.dbname === defaultDb)
assert(po.objectName === "CreateDataSourceTableAsSelectCommand")
if (catalogImpl == "hive") {
assert(po.columns === Seq("key", "value"))
@@ -1312,10 +1310,9 @@ class InMemoryPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
- override protected val catalogImpl: String = if (isSparkV2) "in-memory" else "hive"
+ override protected val catalogImpl: String = "hive"
test("AlterTableSerDePropertiesCommand") {
- assume(!isSparkV2)
withTable("AlterTableSerDePropertiesCommand") { t =>
sql(s"CREATE TABLE $t (key int, pid int) USING hive PARTITIONED BY (pid)")
sql(s"ALTER TABLE $t ADD IF NOT EXISTS PARTITION (pid=1)")
@@ -1340,7 +1337,6 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
}
test("CreateTableCommand") {
- assume(!isSparkV2)
withTable("CreateTableCommand") { _ =>
val plan = sql(s"CREATE TABLE CreateTableCommand(a int, b string) USING hive")
.queryExecution.analyzed
@@ -1361,7 +1357,6 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
}
test("CreateHiveTableAsSelectCommand") {
- assume(!isSparkV2)
val plan = sql(s"CREATE TABLE CreateHiveTableAsSelectCommand USING hive" +
s" AS SELECT key, value FROM $reusedTable")
.queryExecution.analyzed
@@ -1392,7 +1387,6 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
}
test("LoadDataCommand") {
- assume(!isSparkV2)
val dataPath = getClass.getClassLoader.getResource("data.txt").getPath
val tableName = reusedDb + "." + "LoadDataToTable"
withTable(tableName) { _ =>
@@ -1422,7 +1416,6 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
}
test("InsertIntoDatasourceDirCommand") {
- assume(!isSparkV2)
val tableDirectory = getClass.getResource("/").getPath + "table_directory"
val directory = File(tableDirectory).createDirectory()
val plan = sql(
@@ -1448,7 +1441,6 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
}
test("InsertIntoDataSourceCommand") {
- assume(!isSparkV2)
val tableName = "InsertIntoDataSourceTable"
withTable(tableName) { _ =>
// sql(s"CREATE TABLE $tableName (a int, b string) USING parquet")
@@ -1507,7 +1499,6 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
}
test("InsertIntoHadoopFsRelationCommand") {
- assume(!isSparkV2)
val tableName = "InsertIntoHadoopFsRelationTable"
withTable(tableName) { _ =>
sql(s"CREATE TABLE $tableName (a int, b string) USING parquet")
@@ -1549,7 +1540,6 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
}
test("InsertIntoDataSourceDirCommand") {
- assume(!isSparkV2)
val tableDirectory = getClass.getResource("/").getPath + "table_directory"
val directory = File(tableDirectory).createDirectory()
val plan = sql(
@@ -1575,7 +1565,6 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
}
test("InsertIntoHiveDirCommand") {
- assume(!isSparkV2)
val tableDirectory = getClass.getResource("/").getPath + "table_directory"
val directory = File(tableDirectory).createDirectory()
val plan = sql(
@@ -1601,7 +1590,6 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
}
test("InsertIntoHiveTableCommand") {
- assume(!isSparkV2)
val tableName = "InsertIntoHiveTable"
withTable(tableName) { _ =>
sql(s"CREATE TABLE $tableName (a int, b string) USING hive")
@@ -1631,7 +1619,6 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
}
test("ShowCreateTableAsSerdeCommand") {
- assume(!isSparkV2)
withTable("ShowCreateTableAsSerdeCommand") { t =>
sql(s"CREATE TABLE $t (key int, pid int) USING hive PARTITIONED BY (pid)")
val plan = sql(s"SHOW CREATE TABLE $t AS SERDE").queryExecution.analyzed
@@ -1653,7 +1640,6 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
}
test("OptimizedCreateHiveTableAsSelectCommand") {
- assume(!isSparkV2)
val plan = sql(
s"CREATE TABLE OptimizedCreateHiveTableAsSelectCommand STORED AS parquet AS SELECT 1 as a")
.queryExecution.analyzed
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
index 6b1087930..232ef1b74 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
@@ -32,7 +32,6 @@ import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
trait SparkSessionProvider {
protected val catalogImpl: String
protected def format: String = if (catalogImpl == "hive") "hive" else "parquet"
- protected val isSparkV2: Boolean = isSparkVersionAtMost("2.4")
protected val isSparkV31OrGreater: Boolean = isSparkVersionAtLeast("3.1")
protected val isSparkV32OrGreater: Boolean = isSparkVersionAtLeast("3.2")
protected val isSparkV33OrGreater: Boolean = isSparkVersionAtLeast("3.3")