You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kyuubi.apache.org by wa...@apache.org on 2024/03/29 02:51:58 UTC
(kyuubi) branch master updated: [KYUUBI #6215] Improve DropIgnoreNonexistent rule for Spark 3.5
This is an automated email from the ASF dual-hosted git repository.
wangzhen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new ad612349f [KYUUBI #6215] Improve DropIgnoreNonexistent rule for Spark 3.5
ad612349f is described below
commit ad612349fb5e04ed067ac6cd853f200c2e0d8835
Author: wforget <64...@qq.com>
AuthorDate: Fri Mar 29 10:51:46 2024 +0800
[KYUUBI #6215] Improve DropIgnoreNonexistent rule for Spark 3.5
# :mag: Description
## Issue References :link:
This pull request fixes #
## Describe Your Solution :wrench:
Improve DropIgnoreNonexistent rule for Spark 3.5
## Types of changes :bookmark:
- [ ] Bugfix (non-breaking change which fixes an issue)
- [X] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to change)
## Test Plan :test_tube:
#### Behavior Without This Pull Request :coffin:
#### Behavior With This Pull Request :tada:
#### Related Unit Tests
DropIgnoreNonexistentSuite
---
# Checklist :memo:
- [X] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)
**Be nice. Be informative.**
Closes #6215 from wForget/hotfix2.
Closes #6215
cb1d34de1 [wforget] Improve DropIgnoreNonexistent rule for spark 3.5
Authored-by: wforget <64...@qq.com>
Signed-off-by: wforget <64...@qq.com>
---
.../org/apache/kyuubi/sql/DropIgnoreNonexistent.scala | 11 +++++++++--
.../org/apache/spark/sql/DropIgnoreNonexistentSuite.scala | 15 ++++++++++++++-
2 files changed, 23 insertions(+), 3 deletions(-)
diff --git a/extensions/spark/kyuubi-extension-spark-3-5/src/main/scala/org/apache/kyuubi/sql/DropIgnoreNonexistent.scala b/extensions/spark/kyuubi-extension-spark-3-5/src/main/scala/org/apache/kyuubi/sql/DropIgnoreNonexistent.scala
index e33632b8b..26b4b5b94 100644
--- a/extensions/spark/kyuubi-extension-spark-3-5/src/main/scala/org/apache/kyuubi/sql/DropIgnoreNonexistent.scala
+++ b/extensions/spark/kyuubi-extension-spark-3-5/src/main/scala/org/apache/kyuubi/sql/DropIgnoreNonexistent.scala
@@ -18,9 +18,9 @@ package org.apache.kyuubi.sql
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.analysis.{UnresolvedFunctionName, UnresolvedRelation}
-import org.apache.spark.sql.catalyst.plans.logical.{DropFunction, DropNamespace, LogicalPlan, NoopCommand, UncacheTable}
+import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.Rule
-import org.apache.spark.sql.execution.command.{AlterTableDropPartitionCommand, DropTableCommand}
+import org.apache.spark.sql.execution.command.{AlterTableDropPartitionCommand, DropFunctionCommand, DropTableCommand}
import org.apache.kyuubi.sql.KyuubiSQLConf._
@@ -33,8 +33,15 @@ case class DropIgnoreNonexistent(session: SparkSession) extends Rule[LogicalPlan
i.copy(ifExists = true)
case i @ DropTableCommand(_, false, _, _) =>
i.copy(ifExists = true)
+ case i @ DropTable(_, false, _) =>
+ i.copy(ifExists = true)
case i @ DropNamespace(_, false, _) =>
i.copy(ifExists = true)
+ case i @ DropFunctionCommand(_, false, _) =>
+ i.copy(ifExists = true)
+ case i @ DropView(_, false) =>
+ i.copy(ifExists = true)
+ // refer: org.apache.spark.sql.catalyst.analysis.ResolveCommandsWithIfExists
case UncacheTable(u: UnresolvedRelation, false, _) =>
NoopCommand("UNCACHE TABLE", u.multipartIdentifier)
case DropFunction(u: UnresolvedFunctionName, false) =>
diff --git a/extensions/spark/kyuubi-extension-spark-3-5/src/test/scala/org/apache/spark/sql/DropIgnoreNonexistentSuite.scala b/extensions/spark/kyuubi-extension-spark-3-5/src/test/scala/org/apache/spark/sql/DropIgnoreNonexistentSuite.scala
index bbc61fb44..1899a1ef7 100644
--- a/extensions/spark/kyuubi-extension-spark-3-5/src/test/scala/org/apache/spark/sql/DropIgnoreNonexistentSuite.scala
+++ b/extensions/spark/kyuubi-extension-spark-3-5/src/test/scala/org/apache/spark/sql/DropIgnoreNonexistentSuite.scala
@@ -16,7 +16,7 @@
*/
package org.apache.spark.sql
-import org.apache.spark.sql.catalyst.plans.logical.{DropNamespace, NoopCommand}
+import org.apache.spark.sql.catalyst.plans.logical.{DropNamespace, DropTable, NoopCommand}
import org.apache.spark.sql.execution.command._
import org.apache.kyuubi.sql.KyuubiSQLConf
@@ -29,10 +29,23 @@ class DropIgnoreNonexistentSuite extends KyuubiSparkSQLExtensionTest {
val df1 = sql("DROP DATABASE nonexistent_database")
assert(df1.queryExecution.analyzed.asInstanceOf[DropNamespace].ifExists == true)
+ // drop nonexistent table
+ val df2 = sql("DROP TABLE nonexistent_table")
+ assert(df2.queryExecution.analyzed.asInstanceOf[DropTable].ifExists == true)
+
+ // drop nonexistent view
+ val df3 = sql("DROP VIEW nonexistent_view")
+ assert(df3.queryExecution.analyzed.asInstanceOf[DropTableCommand].isView == true &&
+ df3.queryExecution.analyzed.asInstanceOf[DropTableCommand].ifExists == true)
+
// drop nonexistent function
val df4 = sql("DROP FUNCTION nonexistent_function")
assert(df4.queryExecution.analyzed.isInstanceOf[NoopCommand])
+ // drop nonexistent temporary function
+ val df5 = sql("DROP TEMPORARY FUNCTION nonexistent_temp_function")
+ assert(df5.queryExecution.analyzed.asInstanceOf[DropFunctionCommand].ifExists == true)
+
// drop nonexistent PARTITION
withTable("test") {
sql("CREATE TABLE IF NOT EXISTS test(i int) PARTITIONED BY (p int)")